In [5]:
import numpy as np
import matplotlib.pyplot as plt
from sklearn.model_selection import train_test_split
from sklearn.linear_model import LinearRegression,SGDRegressor
from sklearn.preprocessing import PolynomialFeatures,StandardScaler
from sklearn.metrics import r2_score
from sklearn.pipeline import Pipeline
In [7]:
X = 6*np.random.rand(200, 1) - 3
y = 0.8*X**2 + 0.9*X+2+np.random.randn(200,1)
# y = 0.8x^2 + 0.9x + 2
plt.plot(X,y,'b.')
plt.xlabel("X")
plt.ylabel("y")
plt.show()
In [12]:
# Train test split
X_train,X_test,y_train,y_test = train_test_split(X,y,test_size=0.2,random_state=2)
In [13]:
# applying linear regression
lr = LinearRegression()
In [15]:
lr.fit(X_train,y_train)
Out[15]:
LinearRegression()In a Jupyter environment, please rerun this cell to show the HTML representation or trust the notebook.
On GitHub, the HTML representation is unable to render, please try loading this page with nbviewer.org.
LinearRegression()
In [16]:
y_pred = lr.predict(X_test)
r2_score(y_test,y_pred)
Out[16]:
0.036128825868604886
In [17]:
plt.plot(X_train,lr.predict(X_train),color='r')
plt.plot(X, y, "b.")
plt.xlabel("X")
plt.ylabel("y")
plt.show()
In [18]:
# Applying Polynomial Linear Regression
# degree 2
poly = PolynomialFeatures(degree=2,include_bias=True)
X_train_trans = poly.fit_transform(X_train)
X_test_trans = poly.transform(X_test)
In [19]:
print(X_train[0])
print(X_train_trans[0])
[-1.01036874] [ 1. -1.01036874 1.02084498]
In [20]:
# include_bias parameter
lr = LinearRegression()
lr.fit(X_train_trans,y_train)
Out[20]:
LinearRegression()In a Jupyter environment, please rerun this cell to show the HTML representation or trust the notebook.
On GitHub, the HTML representation is unable to render, please try loading this page with nbviewer.org.
LinearRegression()
In [21]:
y_pred = lr.predict(X_test_trans)
In [22]:
r2_score(y_test,y_pred)
Out[22]:
0.8588711624388841
In [23]:
print(lr.coef_)
print(lr.intercept_)
[[0. 0.89724437 0.78137739]] [2.11683801]
In [24]:
X_new=np.linspace(-3, 3, 200).reshape(200, 1)
X_new_poly = poly.transform(X_new)
y_new = lr.predict(X_new_poly)
In [25]:
plt.plot(X_new, y_new, "r-", linewidth=2, label="Predictions")
plt.plot(X_train, y_train, "b.",label='Training points')
plt.plot(X_test, y_test, "g.",label='Testing points')
plt.xlabel("X")
plt.ylabel("y")
plt.legend()
plt.show()
In [26]:
def polynomial_regression(degree):
X_new=np.linspace(-3, 3, 100).reshape(100, 1)
X_new_poly = poly.transform(X_new)
polybig_features = PolynomialFeatures(degree=degree, include_bias=False)
std_scaler = StandardScaler()
lin_reg = LinearRegression()
polynomial_regression = Pipeline([
("poly_features", polybig_features),
("std_scaler", std_scaler),
("lin_reg", lin_reg),
])
polynomial_regression.fit(X, y)
y_newbig = polynomial_regression.predict(X_new)
plt.plot(X_new, y_newbig,'r', label="Degree " + str(degree), linewidth=2)
plt.plot(X_train, y_train, "b.", linewidth=3)
plt.plot(X_test, y_test, "g.", linewidth=3)
plt.legend(loc="upper left")
plt.xlabel("X")
plt.ylabel("y")
plt.axis([-3, 3, 0, 10])
plt.show()
In [27]:
polynomial_regression(350)
D:\New folder\jupyter\Lib\site-packages\sklearn\utils\extmath.py:1066: RuntimeWarning: overflow encountered in square temp **= 2 D:\New folder\jupyter\Lib\site-packages\numpy\core\fromnumeric.py:88: RuntimeWarning: overflow encountered in reduce return ufunc.reduce(obj, axis, dtype, out, **passkwargs)
In [28]:
poly.powers_
Out[28]:
array([[0],
[1],
[2]], dtype=int64)
In [29]:
# Applying Gradient Descent
poly = PolynomialFeatures(degree=2)
X_train_trans = poly.fit_transform(X_train)
X_test_trans = poly.transform(X_test)
sgd = SGDRegressor(max_iter=100)
sgd.fit(X_train_trans,y_train)
X_new=np.linspace(-2.9, 2.8, 200).reshape(200, 1)
X_new_poly = poly.transform(X_new)
y_new = sgd.predict(X_new_poly)
y_pred = sgd.predict(X_test_trans)
plt.plot(X_new, y_new, "r-", linewidth=2, label="Predictions " + str(round(r2_score(y_test,y_pred),2)))
plt.plot(X_train, y_train, "b.",label='Training points')
plt.plot(X_test, y_test, "g.",label='Testing points')
plt.xlabel("X")
plt.ylabel("y")
plt.legend()
plt.show()
D:\New folder\jupyter\Lib\site-packages\sklearn\utils\validation.py:1143: DataConversionWarning: A column-vector y was passed when a 1d array was expected. Please change the shape of y to (n_samples, ), for example using ravel(). y = column_or_1d(y, warn=True)
In [30]:
# 3D polynomial regression
x = 7 * np.random.rand(100, 1) - 2.8
y = 7 * np.random.rand(100, 1) - 2.8
z = x**2 + y**2 + 0.2*x + 0.2*y + 0.1*x*y +2 + np.random.randn(100, 1)
# z = x^2 + y^2 + 0.2x + 0.2y + 0.1xy + 2
In [31]:
import plotly.express as px
df = px.data.iris()
fig = px.scatter_3d(df, x=x.ravel(), y=y.ravel(), z=z.ravel())
fig.show()
In [33]:
lr = LinearRegression()
lr.fit(np.array([x,y]).reshape(100,2),z)
x_input = np.linspace(x.min(), x.max(), 10)
y_input = np.linspace(y.min(), y.max(), 10)
xGrid, yGrid = np.meshgrid(x_input,y_input)
final = np.vstack((xGrid.ravel().reshape(1,100),yGrid.ravel().reshape(1,100))).T
z_final = lr.predict(final).reshape(10,10)
In [34]:
import plotly.graph_objects as go
fig = px.scatter_3d(df, x=x.ravel(), y=y.ravel(), z=z.ravel())
fig.add_trace(go.Surface(x = x_input, y = y_input, z =z_final ))
fig.show()
In [41]:
X_multi = np.array([x,y]).reshape(100,2)
X_multi.shape
Out[41]:
(100, 2)
In [60]:
poly = PolynomialFeatures(degree=30)
X_multi_trans = poly.fit_transform(X_multi)
In [56]:
lr = LinearRegression()
lr.fit(X_multi_trans,z)
Out[56]:
LinearRegression()In a Jupyter environment, please rerun this cell to show the HTML representation or trust the notebook.
On GitHub, the HTML representation is unable to render, please try loading this page with nbviewer.org.
LinearRegression()
In [57]:
X_test_multi = poly.transform(final)
In [58]:
z_final = lr.predict(X_multi_trans).reshape(10,10)
In [59]:
fig = px.scatter_3d(x=x.ravel(), y=y.ravel(), z=z.ravel())
fig.add_trace(go.Surface(x = x_input, y = y_input, z =z_final))
fig.update_layout(scene = dict(zaxis = dict(range=[0,35])))
fig.show()
In [ ]: